library(rethinking)
Loading required package: rstan
Loading required package: StanHeaders
Loading required package: ggplot2
rstan (Version 2.21.2, GitRev: 2e1f913d3ca3)
For execution on a local, multicore CPU with excess RAM we recommend calling
options(mc.cores = parallel::detectCores()).
To avoid recompilation of unchanged Stan programs, we recommend calling
rstan_options(auto_write = TRUE)
Loading required package: parallel
rethinking (Version 2.11)

Attaching package: ‘rethinking’

The following object is masked from ‘package:stats’:

    rstudent
library(brms)
Loading required package: Rcpp
Registered S3 method overwritten by 'htmlwidgets':
  method           from         
  print.htmlwidget tools:rstudio
Loading 'brms' package (version 2.13.5). Useful instructions
can be found by typing help('brms'). A more detailed introduction
to the package is available through vignette('brms_overview').

Attaching package: ‘brms’

The following objects are masked from ‘package:rethinking’:

    LOO, stancode, WAIC

The following object is masked from ‘package:rstan’:

    loo

The following object is masked from ‘package:stats’:

    ar
library(tidyverse)
Registered S3 methods overwritten by 'dbplyr':
  method         from
  print.tbl_lazy     
  print.tbl_sql      
── Attaching packages ────────────────────── tidyverse 1.3.0 ──
✓ tibble  3.0.3     ✓ dplyr   1.0.0
✓ tidyr   1.1.0     ✓ stringr 1.4.0
✓ readr   1.3.1     ✓ forcats 0.5.0
✓ purrr   0.3.4     
── Conflicts ───────────────────────── tidyverse_conflicts() ──
x tidyr::extract() masks rstan::extract()
x dplyr::filter()  masks stats::filter()
x dplyr::lag()     masks stats::lag()
x purrr::map()     masks rethinking::map()
# Load round-1 germination data, keep only water potential 0 (wps == 0),
# and select the columns used downstream.
# NOTE(review): relative path assumes the Dimensions repo sits alongside
# this project — confirm before re-running.
germ <- read_csv("../Dimensions/hydrothermal-all-species/data/light_round1_tall.csv") %>% filter(wps==0) %>%
  select(pops, temps, total_seeds, germ, day, cumulative_germ)
Parsed with column specification:
cols(
  pops = col_character(),
  temps = col_double(),
  wps = col_double(),
  date = col_character(),
  total_seeds = col_double(),
  germ = col_double(),
  start_date = col_date(format = ""),
  census_date = col_date(format = ""),
  day = col_double(),
  cumulative_germ = col_double(),
  cumulative_prop_germ = col_double()
)
germ

What if we need one event per row?

# Expand a per-census summary for one pops x temps group into one row per
# seed. Seeds that germinated get germ = 1 and their germination day; the
# remaining seeds get germ = 0 and day = the final census day
# (i.e. right-censored at the end of the experiment).
#
# @param df data frame for a single group with columns total_seeds, germ
#   (number of seeds germinating at each census), and day.
# @return a tibble with one row per seed: id, germ (0/1), day.
one_per_row <- function(df) {
  # guard against germ counts that exceed the recorded total_seeds
  total_seed <- max(df$total_seeds, sum(df$germ))
  newdata <- tibble(id = seq_len(total_seed), germ = 0, day = max(df$day))
  events <- df %>% filter(germ > 0)
  n_germ <- sum(events$germ)
  if (n_germ > 0) {
    # vectorized replacement for the original nested loops: repeat each
    # census day once per seed that germinated at that census, filling the
    # first n_germ rows in the same order the loop version did
    newdata$germ[seq_len(n_germ)] <- 1
    newdata$day[seq_len(n_germ)] <- rep(events$day, events$germ)
  }
  newdata
}

# Expand the summarized counts to one row per seed, separately for each
# pops x temps combination, via nest / map / unnest.
germone <- germ %>% group_by(pops, temps) %>%
  select(-cumulative_germ) %>% # not needed in this encoding (I think...in any case would need to be recalculated)
  nest() %>%
  mutate(newdata=map(data, one_per_row)) %>% # one_per_row defined above
  select(-data) %>%
  unnest(newdata)

germone

##1

Is effect of temp ~linear on cum germ?

# Final-census (day 28) cumulative germination for STDI across
# temperatures — a quick visual check of linearity in temperature.
germ %>% filter(pops=="STDI", day==28) %>%
  ggplot(aes(x=temps,y=cumulative_germ)) +
  geom_col()

No — so we can’t treat temp as a continuous linear predictor.

germ.stdi <- germone %>% filter(pops=="STDI") %>% select(-pops)
Adding missing grouping variables: `pops`
germ.stdi

Try a censored lambda model

# Censored exponential time-to-event model:
#  - germinated seeds (germ == 1) contribute the exponential density of day;
#  - ungerminated seeds (germ == 0) are right-censored at the final census
#    and contribute the complementary CDF (exponential_lccdf).
# mu is mean days to germination, modeled on the log scale with one
# intercept per temperature level (temps converted to a 1..8 index).
d <- list(germ=germ.stdi$germ, temps=as.numeric(as.factor(germ.stdi$temps)), day=germ.stdi$day)
m1.1 <- ulam(
  alist(
    day | germ==1 ~ exponential( lambda),
    day | germ==0 ~ custom(exponential_lccdf( !Y | lambda)),
    lambda <- 1.0 / mu,  # exponential rate is 1 / mean
    log(mu) <- a[temps],
    a[temps] ~ normal(0,1)),
  data=d,
  chains=4,
  cores = 4
)
starting worker pid=69793 on localhost:11919 at 10:13:50.484
starting worker pid=69807 on localhost:11919 at 10:13:50.767
starting worker pid=69821 on localhost:11919 at 10:13:51.046
starting worker pid=69835 on localhost:11919 at 10:13:51.330

SAMPLING FOR MODEL '6c7ed290b113c205c3adee6a5a6dddff' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 7.3e-05 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.73 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:   1 / 1000 [  0%]  (Warmup)
Chain 1: Iteration: 100 / 1000 [ 10%]  (Warmup)

SAMPLING FOR MODEL '6c7ed290b113c205c3adee6a5a6dddff' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 8.8e-05 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.88 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:   1 / 1000 [  0%]  (Warmup)
Chain 1: Iteration: 200 / 1000 [ 20%]  (Warmup)
Chain 2: Iteration: 100 / 1000 [ 10%]  (Warmup)
Chain 1: Iteration: 300 / 1000 [ 30%]  (Warmup)
Chain 1: Iteration: 400 / 1000 [ 40%]  (Warmup)
Chain 2: Iteration: 200 / 1000 [ 20%]  (Warmup)
Chain 2: Iteration: 300 / 1000 [ 30%]  (Warmup)
Chain 1: Iteration: 500 / 1000 [ 50%]  (Warmup)
Chain 1: Iteration: 501 / 1000 [ 50%]  (Sampling)
Chain 2: Iteration: 400 / 1000 [ 40%]  (Warmup)
Chain 1: Iteration: 600 / 1000 [ 60%]  (Sampling)
Chain 2: Iteration: 500 / 1000 [ 50%]  (Warmup)
Chain 2: Iteration: 501 / 1000 [ 50%]  (Sampling)
Chain 1: Iteration: 700 / 1000 [ 70%]  (Sampling)
Chain 1: Iteration: 800 / 1000 [ 80%]  (Sampling)

SAMPLING FOR MODEL '6c7ed290b113c205c3adee6a5a6dddff' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 7.9e-05 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.79 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:   1 / 1000 [  0%]  (Warmup)
Chain 2: Iteration: 600 / 1000 [ 60%]  (Sampling)
Chain 2: Iteration: 700 / 1000 [ 70%]  (Sampling)
Chain 1: Iteration: 900 / 1000 [ 90%]  (Sampling)
Chain 3: Iteration: 100 / 1000 [ 10%]  (Warmup)
Chain 2: Iteration: 800 / 1000 [ 80%]  (Sampling)

SAMPLING FOR MODEL '6c7ed290b113c205c3adee6a5a6dddff' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 8.8e-05 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.88 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:   1 / 1000 [  0%]  (Warmup)
Chain 1: Iteration: 1000 / 1000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 0.304613 seconds (Warm-up)
Chain 1:                0.302696 seconds (Sampling)
Chain 1:                0.607309 seconds (Total)
Chain 1: 
Chain 3: Iteration: 200 / 1000 [ 20%]  (Warmup)
Chain 2: Iteration: 900 / 1000 [ 90%]  (Sampling)
Chain 4: Iteration: 100 / 1000 [ 10%]  (Warmup)
Chain 2: Iteration: 1000 / 1000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 0.276614 seconds (Warm-up)
Chain 2:                0.239425 seconds (Sampling)
Chain 2:                0.516039 seconds (Total)
Chain 2: 
Chain 3: Iteration: 300 / 1000 [ 30%]  (Warmup)
Chain 3: Iteration: 400 / 1000 [ 40%]  (Warmup)
Chain 4: Iteration: 200 / 1000 [ 20%]  (Warmup)
Chain 3: Iteration: 500 / 1000 [ 50%]  (Warmup)
Chain 3: Iteration: 501 / 1000 [ 50%]  (Sampling)
Chain 4: Iteration: 300 / 1000 [ 30%]  (Warmup)
Chain 3: Iteration: 600 / 1000 [ 60%]  (Sampling)
Chain 4: Iteration: 400 / 1000 [ 40%]  (Warmup)
Chain 3: Iteration: 700 / 1000 [ 70%]  (Sampling)
Chain 4: Iteration: 500 / 1000 [ 50%]  (Warmup)
Chain 4: Iteration: 501 / 1000 [ 50%]  (Sampling)
Chain 3: Iteration: 800 / 1000 [ 80%]  (Sampling)
Chain 4: Iteration: 600 / 1000 [ 60%]  (Sampling)
Chain 4: Iteration: 700 / 1000 [ 70%]  (Sampling)
Chain 3: Iteration: 900 / 1000 [ 90%]  (Sampling)
Chain 4: Iteration: 800 / 1000 [ 80%]  (Sampling)
Chain 3: Iteration: 1000 / 1000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 0.309808 seconds (Warm-up)
Chain 3:                0.272335 seconds (Sampling)
Chain 3:                0.582143 seconds (Total)
Chain 3: 
Chain 4: Iteration: 900 / 1000 [ 90%]  (Sampling)
Chain 4: Iteration: 1000 / 1000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 0.302916 seconds (Warm-up)
Chain 4:                0.227271 seconds (Sampling)
Chain 4:                0.530187 seconds (Total)
Chain 4: 
precis(m1.1, depth = 2)

The above represent log(mean time to germination)

exp(5.45)
[1] 232.7582
exp(2.48)
[1] 11.94126

posterior

preddata <- expand_grid(temps=1:8, day=1:28)
pred <- link(m1.1, data = preddata)
str(pred)
List of 2
 $ lambda: num [1:2000, 1:224] 0.00427 0.00607 0.00343 0.00467 0.00443 ...
 $ mu    : num [1:2000, 1:224] 234 165 291 214 226 ...

mu is average days to germ, lambda is rate parameter. neither change over time, of course, so having days doesn’t really make sense.

# Posterior mean and HPDI of mu (mean days to germination) per prediction
# row. Compute the HPDI matrix once instead of once per bound.
preddata$mu <- apply(pred$mu,2,mean)
mu_hpdi <- apply(pred$mu, 2, HPDI)
preddata$low <- mu_hpdi[1, ]
preddata$high <- mu_hpdi[2, ]

Single temp. Don’t need day posterior

preddata <- expand_grid(temps=3)
pred <- link(m1.1, data = preddata)
str(pred)
List of 2
 $ lambda: num [1:2000, 1] 0.0182 0.0146 0.019 0.0195 0.0143 ...
 $ mu    : num [1:2000, 1] 54.9 68.7 52.6 51.2 70.1 ...

how to convert to probs? use pexp.

# Convert one posterior rate draw to cumulative germination probability
# over days 1..28 via the exponential CDF.
predprobs <- pexp(1:28,rate=pred$lambda[1])
plot(x=1:28,y=predprobs, type="l")

Even though the model isn’t using day, including day in the prediction data frame will help me keep the data in the correct format. Maybe.

preddata <- expand_grid(temps=1:8, day=1:28)
Warning message:
Unknown or uninitialised column: `germ`. 
pred <- link(m1.1, data = preddata)
str(pred)
List of 2
 $ lambda: num [1:2000, 1:224] 0.00439 0.00507 0.00342 0.00459 0.00509 ...
 $ mu    : num [1:2000, 1:224] 228 197 293 218 197 ...
predresults <- preddata %>%
  mutate(lambda=as.list(as.data.frame(pred$lambda)))
predresults
predresults <- predresults %>%
Warning messages:
1: Unknown or uninitialised column: `germ`. 
2: Unknown or uninitialised column: `germ`. 
  mutate(probs=map2(day, lambda, ~ pexp(.x, .y)),
         mu=map_dbl(probs, mean),
         lower=map_dbl(probs, ~ HPDI(.)[1] %>% unlist()),
         upper=map_dbl(probs, ~ HPDI(.)[2]) %>% unlist())
predresults
predresults %>% select(-lambda, -probs) %>%
Warning messages:
1: Unknown or uninitialised column: `germ`. 
2: Unknown or uninitialised column: `germ`. 
3: Unknown or uninitialised column: `germ`. 
4: Unknown or uninitialised column: `germ`. 
5: Unknown or uninitialised column: `germ`. 
6: Unknown or uninitialised column: `germ`. 
7: Unknown or uninitialised column: `germ`. 
8: Unknown or uninitialised column: `germ`. 
9: Unknown or uninitialised column: `germ`. 
  mutate(temps=factor(temps, labels=as.character(sort(unique(germ.stdi$temps))))) %>%
  ggplot(aes(x=day,y=mu,color=temps,group=temps)) +
  geom_line() 

Add realdata:

stdi.plot <- germ %>% filter(pops=="STDI") %>% 
Warning messages:
1: Unknown or uninitialised column: `germ`. 
2: Unknown or uninitialised column: `germ`. 
3: Unknown or uninitialised column: `germ`. 
  select(day, temps, cumulative_germ, total_seeds) %>%
  mutate(temps=as.factor(temps),
         prop.germ=cumulative_germ/total_seeds)

predresults %>% select(-lambda, -probs) %>%
  mutate(temps=factor(temps, labels=as.character(sort(unique(germ.stdi$temps))))) %>%
  ggplot(aes(x=day,y=mu,color=temps,group=temps)) +
  geom_line() +
  geom_point(aes(y=prop.germ), data=stdi.plot)

Poor!

brms

need to set up indicator for censoring.

# brms censoring indicator: "right" = still ungerminated at final census,
# "none" = germination day observed. Zero-pad temps so the character
# levels sort correctly as strings (05 < 10 < ... < 40).
germ.stdi <- germ.stdi %>%
  mutate(cens=ifelse(germ==0, "right", "none"),
         tempsc=as.character(temps) %>% str_pad(width=2, pad="0"))
germ.stdi
# Inspect the default priors, then fit a censored exponential model with
# one coefficient per temperature level (0 + tempsc drops the intercept).
get_prior(day | cens(cens) ~ 0 + tempsc, family = exponential, data=germ.stdi)
m1.2 <- brm(day | cens(cens) ~ 0 + tempsc,
            family = exponential(),
            # name the prior argument explicitly rather than relying on
            # positional matching into brm()
            prior = set_prior("normal(0,1)", class = "b"),
            data = germ.stdi)
Compiling Stan program...
recompiling to avoid crashing R session
Start sampling

SAMPLING FOR MODEL '1de05645edb74de0b2dc64c80aecacfa' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 9.9e-05 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0.99 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 0.42101 seconds (Warm-up)
Chain 1:                0.460126 seconds (Sampling)
Chain 1:                0.881136 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL '1de05645edb74de0b2dc64c80aecacfa' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 6.5e-05 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.65 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 0.466163 seconds (Warm-up)
Chain 2:                0.47715 seconds (Sampling)
Chain 2:                0.943313 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL '1de05645edb74de0b2dc64c80aecacfa' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 6.8e-05 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.68 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 0.473642 seconds (Warm-up)
Chain 3:                0.47086 seconds (Sampling)
Chain 3:                0.944502 seconds (Total)
Chain 3: 

SAMPLING FOR MODEL '1de05645edb74de0b2dc64c80aecacfa' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 6.6e-05 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 0.66 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 0.503759 seconds (Warm-up)
Chain 4:                0.523177 seconds (Sampling)
Chain 4:                1.02694 seconds (Total)
Chain 4: 
summary(m1.2)
 Family: exponential 
  Links: mu = log 
Formula: day | cens(cens) ~ 0 + tempsc 
   Data: germ.stdi (Number of observations: 396) 
Samples: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup samples = 4000

Population-Level Effects: 
         Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS
tempsc05     5.45      0.37     4.78     6.24 1.00     7283
tempsc10     4.70      0.29     4.17     5.28 1.00     6843
tempsc15     4.03      0.22     3.62     4.48 1.00     6509
tempsc20     2.48      0.16     2.18     2.81 1.00     7217
tempsc25     3.07      0.17     2.75     3.43 1.00     6765
tempsc30     3.73      0.20     3.36     4.15 1.00     7103
tempsc35     5.21      0.36     4.55     5.97 1.00     7226
tempsc40     5.62      0.42     4.87     6.50 1.00     7151
         Tail_ESS
tempsc05     2754
tempsc10     3144
tempsc15     2931
tempsc20     2363
tempsc25     2900
tempsc30     2929
tempsc35     2836
tempsc40     3045

Samples were drawn using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).
predict(m1.2)
        Estimate Est.Error      Q2.5      Q97.5
  [1,] 245.48593 276.28896 5.9395461  984.04837
  [2,] 242.23213 286.87218 6.3819171  981.61491
  [3,] 250.86022 280.25454 6.2885997 1030.22800
  [4,] 256.28935 298.34422 5.4748918 1055.33296
  [5,] 248.88268 273.76517 6.6232319  988.98714
  [6,] 246.82111 278.91867 5.3930423 1021.29342
  [7,] 243.12527 291.87037 5.3151713 1025.31298
  [8,] 256.35498 319.37579 5.4280865 1078.32047
  [9,] 244.69604 281.22146 6.3181035  965.23688
 [10,] 242.94745 269.50746 6.0589810  958.61646
 [11,] 243.71065 285.36180 5.1490135  965.90443
 [12,] 247.56716 285.13194 5.8712287 1020.82856
 [13,] 246.98604 275.66611 5.5412536  966.18487
 [14,] 254.29935 288.60175 6.3300574 1093.20875
 [15,] 247.02742 282.59781 5.4030324 1008.83347
 [16,] 244.91603 284.27206 5.9675904 1073.91207
 [17,] 245.47451 291.84292 5.2768930 1030.51216
 [18,] 256.28734 305.30305 6.1820492 1084.92018
 [19,] 248.62687 281.80476 4.9746392  978.54410
 [20,] 251.40590 303.75442 5.9242455 1025.25218
 [21,] 252.65827 294.22591 5.9554948 1079.53751
 [22,] 241.51647 275.99260 5.8766274  966.05036
 [23,] 256.69938 304.29105 4.6867054 1090.36947
 [24,] 246.62537 283.05863 5.7386168 1005.95275
 [25,] 242.68398 270.09111 6.2452575  962.14755
 [26,] 254.38558 298.94829 5.4984008 1059.15621
 [27,] 256.35117 286.76691 6.0886817 1057.61256
 [28,] 256.03642 298.19747 5.0078311 1046.89710
 [29,] 247.45215 288.56796 5.8943048 1033.55964
 [30,] 252.64081 295.91125 7.0697643 1045.08452
 [31,] 242.17832 287.48779 5.8621026 1007.05478
 [32,] 251.27731 303.24875 4.8496529 1094.51168
 [33,] 242.05339 262.85353 5.6578734  973.87055
 [34,] 247.02294 289.04454 5.4853711 1002.53737
 [35,] 248.73554 289.38143 5.1943806  978.48954
 [36,] 247.32520 277.84628 5.1071152  972.31952
 [37,] 249.40900 281.18578 6.8511095 1008.52525
 [38,] 248.35385 273.85119 5.5533685 1007.28173
 [39,] 250.33563 286.24177 5.5894738 1027.68718
 [40,] 249.17018 288.21880 5.0940516 1006.20893
 [41,] 244.94879 275.14227 5.6177052  985.13202
 [42,] 243.79688 277.08250 4.7775950  999.48099
 [43,] 251.91561 295.55931 6.0263582 1006.79203
 [44,] 245.96190 300.40069 5.6394842 1010.78110
 [45,] 242.90806 276.39650 6.0778921  997.20575
 [46,] 250.88947 288.24884 5.6993464 1004.15118
 [47,] 250.97006 299.34549 5.8488313 1032.85560
 [48,] 248.90594 280.92202 5.5527389  995.46031
 [49,] 246.86875 277.52420 5.0680317  999.90985
 [50,] 249.39435 301.81913 6.1553365 1060.86406
 [51,] 116.42709 125.02637 2.6298899  449.33866
 [52,] 113.63849 125.44245 3.0419538  464.71137
 [53,] 113.93168 123.68780 2.3521552  433.78185
 [54,] 115.56201 129.21914 2.6559443  478.11312
 [55,] 116.17197 128.45257 2.6280502  468.60746
 [56,] 112.89060 121.03747 2.3540797  434.58423
 [57,] 115.45180 125.93123 2.3165299  454.85341
 [58,] 112.53145 121.31404 3.0402194  426.15949
 [59,] 114.83847 126.72001 2.3337074  457.04071
 [60,] 111.46362 121.84619 2.3266890  435.51012
 [61,] 114.22104 124.28832 2.3368979  464.19399
 [62,] 114.15659 124.58826 2.8116434  445.14922
 [63,] 115.97564 127.19037 2.8925949  449.77306
 [64,] 111.54293 119.04260 2.6237273  446.25595
 [65,] 115.86622 123.19665 2.7749341  435.42867
 [66,] 115.92553 130.04278 2.6714747  469.55324
 [67,] 112.62361 127.19325 2.8382987  449.90990
 [68,] 113.93452 122.06265 2.6708583  435.12227
 [69,] 117.04914 124.70631 2.5844279  439.06509
 [70,] 111.48485 123.11116 2.6485693  438.85914
 [71,] 115.19836 126.31414 3.1844907  445.74560
 [72,] 112.49195 120.24910 2.1730865  432.91628
 [73,] 114.59751 127.92288 2.3677550  475.35243
 [74,] 112.76485 122.81528 2.8601888  449.90822
 [75,] 114.24424 124.24838 2.8648150  469.74226
 [76,] 114.62245 123.89809 2.9784959  443.11875
 [77,] 116.62826 125.56486 2.7467017  464.55062
 [78,] 115.73272 128.46599 2.2552341  463.05414
 [79,] 111.61570 120.58389 2.9771549  443.04237
 [80,] 113.46259 120.50067 3.3437554  441.44535
 [81,] 117.52116 129.91669 2.2667302  476.10299
 [82,] 113.66269 121.03594 2.5433457  442.68811
 [83,] 114.35423 125.96706 2.4638107  454.88425
 [84,] 114.68713 122.80515 2.8345383  453.24988
 [85,] 112.59472 124.91525 2.9328534  433.67894
 [86,] 112.57567 119.42029 2.9929144  444.28300
 [87,] 112.90195 125.44869 2.6154545  456.72152
 [88,] 114.25701 120.11748 2.7281852  437.37575
 [89,] 117.11534 126.56821 2.9045228  457.96396
 [90,] 115.61370 125.90525 2.8037275  453.81259
 [91,] 111.93322 123.71905 3.0032645  450.11427
 [92,] 112.56501 121.29035 2.7475201  453.33714
 [93,] 112.68844 126.31726 2.6989368  465.72298
 [94,] 113.78460 122.73946 2.4229704  457.65045
 [95,] 116.04001 123.34659 2.5215464  447.10920
 [96,] 114.53202 128.87126 3.2120991  491.89470
 [97,] 114.17967 123.18225 2.6176020  453.54792
 [98,] 113.12574 120.47428 2.9509864  442.45547
 [99,] 115.10766 121.10969 2.5113456  452.32077
[100,] 112.78083 125.00773 3.1270061  433.70837
[101,]  57.63099  59.66212 1.3642004  226.09131
[102,]  59.05578  63.23544 1.4056917  232.06212
[103,]  58.77228  62.72626 1.4568177  232.12854
[104,]  59.11571  62.31879 1.8554721  222.73568
[105,]  59.44657  61.57707 1.4100904  225.88051
[106,]  57.32238  59.98504 1.7006431  217.10223
[107,]  56.62613  58.42773 1.3340094  209.13180
[108,]  57.05942  60.18789 1.3214589  218.17529
[109,]  57.50874  60.71534 1.3427030  214.80432
[110,]  56.33346  59.34278 1.2430267  216.72809
[111,]  57.72113  59.90206 1.6187639  224.14241
[112,]  58.26986  59.48349 1.3720882  220.90318
[113,]  58.98528  60.72138 1.3301597  228.87270
[114,]  58.70049  60.48541 1.3299360  228.38187
[115,]  56.87642  59.89586 1.2907870  211.37613
[116,]  58.34582  60.82773 1.4837262  216.59297
[117,]  57.41657  61.67175 1.5557991  223.44237
[118,]  59.18673  64.23371 1.3165407  232.69828
[119,]  58.05140  61.03019 1.3725890  217.04451
[120,]  56.95254  60.32751 1.6699785  218.23965
[121,]  57.82121  61.84178 1.3051543  225.64270
[122,]  56.92457  58.99305 1.4723505  211.20549
[123,]  57.31353  60.00953 1.4192490  224.40699
[124,]  57.69584  61.66376 1.3790741  232.03066
[125,]  58.04456  62.10834 1.4738002  224.07319
[126,]  56.34597  59.48198 1.3550501  217.61543
[127,]  55.69645  59.50130 1.4922216  215.23117
[128,]  57.95499  59.51879 1.4422915  216.66452
[129,]  57.40621  59.16712 1.2451690  217.87439
[130,]  57.01165  58.92638 1.4908222  217.63205
[131,]  56.13498  58.76407 1.3997760  216.86633
[132,]  57.85418  59.39507 1.4852188  212.13659
[133,]  57.42860  61.14445 1.4770350  223.14657
[134,]  57.32939  61.30402 1.4128366  224.02161
[135,]  58.10253  61.27247 1.5706987  220.32951
[136,]  56.90775  60.31803 0.9773261  222.41015
[137,]  58.32984  61.38570 1.1836468  221.92879
[138,]  57.33881  59.14774 1.3703697  215.25353
[139,]  58.85315  61.53883 1.4530571  229.26195
[140,]  57.99232  59.94806 1.4645695  218.69108
[141,]  58.04516  61.21441 1.2865330  217.61388
[142,]  58.96994  60.59586 1.5086629  220.76435
[143,]  58.18031  61.59890 1.5524734  217.74792
[144,]  57.68485  60.69617 1.2917728  219.68442
[145,]  57.81767  60.50537 1.4748431  218.07158
[146,]  56.79608  59.12409 1.2751674  219.04968
[147,]  56.86705  58.68100 1.4605452  211.40657
[148,]  56.11103  56.83769 1.4429440  211.44768
[149,]  57.19561  62.06577 1.2487454  220.06163
[150,]  57.49533  61.84837 1.4074659  228.49997
[151,]  12.27197  12.52056 0.3000664   46.20609
[152,]  12.25248  12.85818 0.3276078   45.63628
[153,]  12.09528  12.21395 0.3110200   44.69911
[154,]  11.94567  12.37328 0.2981404   45.83071
[155,]  12.11949  12.33247 0.3697251   46.92530
[156,]  12.21601  12.50532 0.3463409   46.88081
[157,]  11.98068  12.12894 0.2755247   45.38906
[158,]  12.42094  12.55827 0.3399742   46.87070
[159,]  12.33380  12.72504 0.2947922   47.21595
[160,]  12.36624  12.93998 0.3119964   47.76821
[161,]  12.16164  12.50003 0.2826861   47.58617
[162,]  12.14625  12.52961 0.2652972   47.67175
[163,]  12.29396  12.89630 0.3189234   47.12113
[164,]  12.44721  12.97298 0.3587818   47.51136
[165,]  11.94930  12.46674 0.2742760   45.78627
[166,]  12.07054  12.26765 0.3453038   44.94155
[167,]  12.30762  12.78541 0.2664174   46.15585
[168,]  12.05743  12.72837 0.2875486   48.31410
[169,]  12.28344  12.61157 0.2768650   46.16698
[170,]  12.44382  12.85329 0.2584302   47.54244
[171,]  11.96851  12.50439 0.2699489   46.22350
[172,]  12.17126  12.50404 0.3201044   44.82649
[173,]  12.14513  12.79836 0.2883293   46.42165
[174,]  12.30985  12.33835 0.2963095   46.15442
[175,]  11.72938  12.24362 0.2825821   45.86416
[176,]  12.08493  12.46260 0.3117071   44.79569
[177,]  12.18555  12.56084 0.3111835   47.09898
[178,]  11.74684  12.36954 0.2692384   45.73838
[179,]  11.99600  12.32803 0.2744868   45.39835
[180,]  11.97738  12.17230 0.3064405   44.94598
[181,]  11.88514  12.37865 0.2563202   47.55534
[182,]  12.15174  12.40544 0.3119910   45.37152
[183,]  12.10761  12.42792 0.3310389   47.09083
[184,]  12.07005  12.30723 0.2857883   44.53654
[185,]  11.94025  12.22609 0.3391099   45.45943
[186,]  12.13459  12.24915 0.3095549   44.80459
[187,]  12.14158  12.65101 0.2941841   46.99871
[188,]  12.36558  12.35434 0.2780419   45.84792
[189,]  11.96528  12.07088 0.2997328   42.73265
[190,]  11.63004  11.79656 0.2874066   44.32265
[191,]  11.91186  12.18271 0.3527353   46.01596
[192,]  12.04230  12.53997 0.2211857   46.12638
[193,]  12.15930  12.47965 0.2561586   46.00510
[194,]  12.15224  12.54451 0.3031508   47.86089
[195,]  12.15352  11.90523 0.3557551   44.02755
[196,]  11.93575  12.67950 0.2849475   47.09558
[197,]  11.98022  12.31339 0.3142566   44.85054
[198,]  11.80354  11.79271 0.2549864   43.84045
[199,]  12.07176  12.79408 0.3226481   45.52612
[200,]  12.24248  12.30320 0.2851709   44.66132
[201,]  22.04160  23.04623 0.5043869   81.98828
[202,]  21.31700  21.95934 0.5322734   79.57782
[203,]  21.22990  21.81416 0.4627612   83.08412
[204,]  22.11949  22.83367 0.5996306   84.43089
[205,]  20.86526  21.28643 0.4086979   78.75142
[206,]  21.96546  22.62555 0.5267420   80.85931
[207,]  21.69990  22.21870 0.5630515   80.79460
[208,]  22.18674  23.14638 0.4884109   86.47765
[209,]  21.81592  22.45872 0.4727375   83.90968
[210,]  21.92559  23.62373 0.4978565   82.90363
[211,]  21.85272  23.17457 0.5059772   83.57458
[212,]  21.81922  22.56560 0.5749176   80.99253
[213,]  22.03872  22.89834 0.4929068   82.21258
[214,]  21.85500  22.56638 0.6078992   83.41186
[215,]  22.00447  22.59749 0.5517704   81.55421
[216,]  21.68894  22.81028 0.5028765   80.31359
[217,]  21.88440  22.33302 0.6528877   80.73556
[218,]  22.20987  23.52346 0.5992538   86.05838
[219,]  21.88431  22.17265 0.5263770   81.98585
[220,]  22.04164  22.75105 0.5335715   84.99743
[221,]  22.19309  23.30873 0.5162106   83.02494
[222,]  22.11350  22.74777 0.5126093   84.58248
[223,]  21.71797  22.33849 0.5257877   82.99858
[224,]  21.89624  22.31395 0.4476239   82.51471
[225,]  21.66672  22.27826 0.4446712   80.89080
[226,]  22.10054  22.93627 0.5872382   81.84553
[227,]  21.75586  22.24162 0.5618915   83.20058
[228,]  21.75806  22.29446 0.5598590   81.85606
[229,]  21.73183  23.38546 0.4583894   85.85994
[230,]  22.14844  22.88615 0.4723619   81.94019
[231,]  21.99403  22.52277 0.5745250   81.74375
[232,]  21.82119  22.19277 0.5981499   79.89591
[233,]  22.17150  23.16092 0.5058090   83.76954
[234,]  21.58653  22.25207 0.5767341   81.06073
[235,]  21.30447  22.77354 0.4980191   81.76521
[236,]  21.93257  22.39044 0.5154882   82.09108
[237,]  21.92902  22.49633 0.5154218   81.93647
[238,]  22.14130  23.25028 0.5415127   84.53434
[239,]  21.42871  21.80453 0.4589648   79.13383
[240,]  21.24644  22.13492 0.6175919   78.62797
[241,]  22.20943  22.96188 0.5465539   84.25448
[242,]  21.91515  23.22055 0.4797306   83.72724
[243,]  21.67557  22.46244 0.5572388   82.31895
[244,]  22.09432  22.23950 0.5465813   81.12303
[245,]  22.21201  23.07999 0.5164889   82.35871
[246,]  21.65638  21.67098 0.6404132   78.65637
[247,]  22.46027  23.38667 0.5119792   88.21120
[248,]  21.25171  21.41397 0.5626897   79.33482
[249,]  22.12569  22.18624 0.6197464   78.56397
[250,]  22.00550  22.35773 0.6558222   82.15761
 [ reached getOption("max.print") -- omitted 146 rows ]

what are these? the average number of days?

predict(m1.2, newdata = expand_grid(tempsc=unique(germ.stdi$tempsc), cens=c("none", "right")))
       Estimate Est.Error      Q2.5      Q97.5
 [1,] 240.09882 269.85968 5.3179532  982.95309
 [2,] 244.62441 283.73613 5.2897528 1005.21999
 [3,] 116.60192 127.79306 2.5642821  460.75439
 [4,] 113.52385 124.52303 2.6162861  467.98802
 [5,]  56.38554  58.31381 1.5015238  211.15345
 [6,]  56.49888  62.03551 1.3771767  217.75420
 [7,]  11.99098  12.02709 0.3321773   44.37220
 [8,]  12.05731  12.51547 0.3234881   46.03726
 [9,]  21.68842  22.33117 0.5812621   81.15107
[10,]  21.46086  22.11309 0.6243808   81.94992
[11,]  43.68283  45.68467 1.0277879  171.34936
[12,]  43.82913  45.34232 0.9076824  162.16085
[13,] 196.44042 225.68336 5.3083119  804.33538
[14,] 192.05273 227.49838 4.4394243  792.41301
[15,] 294.29945 350.13205 5.1195876 1251.17256
[16,] 295.16459 366.09766 6.1149232 1224.97806

again, these are log(mean time to germination)

lambda should be 1/mu, so

# Implied cumulative germination curves at the fastest (tempsc20,
# log-mean 2.48) and a slow (log-mean ~5.6) temperature; rate = 1 / mean.
plot(1:28,pexp(1:28, 1/exp(2.48)), type="l", col="red")
lines(1:28,pexp(1:28, 1/exp(5.60)), type="l", col="blue")

# Same censored regression with a Gamma response distribution; lb = 0
# restricts the coefficient prior to the positive half-line.
get_prior( day | cens(cens) ~ 0 + tempsc,
            family = Gamma(),
            data = germ.stdi)
m1.3 <- brm(day | cens(cens) ~ 0 + tempsc,
            family = Gamma(),
            # name the prior argument explicitly rather than relying on
            # positional matching into brm()
            prior = set_prior("normal(0,1)", class = "b", lb = 0),
            data = germ.stdi)
Compiling Stan program...
Start sampling

SAMPLING FOR MODEL '77de82ec887792111b6580f2af527efa' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.000681 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 6.81 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 5.00816 seconds (Warm-up)
Chain 1:                3.3331 seconds (Sampling)
Chain 1:                8.34126 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL '77de82ec887792111b6580f2af527efa' NOW (CHAIN 2).
Chain 2: Rejecting initial value:
Chain 2:   Log probability evaluates to log(0), i.e. negative infinity.
Chain 2:   Stan can't start sampling from this initial value.
Chain 2: 
Chain 2: Gradient evaluation took 0.000384 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 3.84 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 4.64764 seconds (Warm-up)
Chain 2:                3.13702 seconds (Sampling)
Chain 2:                7.78466 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL '77de82ec887792111b6580f2af527efa' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 0.000434 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 4.34 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 4.86019 seconds (Warm-up)
Chain 3:                3.37768 seconds (Sampling)
Chain 3:                8.23787 seconds (Total)
Chain 3: 

SAMPLING FOR MODEL '77de82ec887792111b6580f2af527efa' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 0.000601 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 6.01 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 4.50939 seconds (Warm-up)
Chain 4:                3.45949 seconds (Sampling)
Chain 4:                7.96889 seconds (Total)
Chain 4: 
summary(m1.3)
 Family: gamma 
  Links: mu = inverse; shape = identity 
Formula: day | cens(cens) ~ 0 + tempsc 
   Data: germ.stdi (Number of observations: 396) 
Samples: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup samples = 4000

Population-Level Effects: 
         Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS
tempsc05     0.00      0.00     0.00     0.00 1.00     4256
tempsc10     0.00      0.00     0.00     0.01 1.00     4607
tempsc15     0.01      0.00     0.00     0.02 1.00     4950
tempsc20     0.07      0.02     0.05     0.11 1.00     6372
tempsc25     0.04      0.01     0.02     0.06 1.00     6371
tempsc30     0.02      0.00     0.01     0.03 1.00     5439
tempsc35     0.00      0.00     0.00     0.00 1.00     4838
tempsc40     0.00      0.00     0.00     0.00 1.00     4878
         Tail_ESS
tempsc05     2391
tempsc10     2976
tempsc15     2803
tempsc20     3075
tempsc25     2871
tempsc30     2842
tempsc35     2814
tempsc40     2330

Family Specific Parameters: 
      Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS
shape     0.68      0.06     0.56     0.81 1.00     3512
      Tail_ESS
shape     2905

Samples were drawn using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).
# Build a prediction grid: every temperature level crossed with both
# censoring codes, then show posterior predictions alongside the grid.
newdata <- expand_grid(
  tempsc = unique(germ.stdi$tempsc),
  cens = c("none", "right")
)
cbind(newdata, predict(m1.3, newdata = newdata))
LS0tCnRpdGxlOiAiZ2VybWluYXRpb24iCm91dHB1dDogaHRtbF9ub3RlYm9vawotLS0KCmBgYHtyfQpsaWJyYXJ5KHJldGhpbmtpbmcpCmxpYnJhcnkoYnJtcykKbGlicmFyeSh0aWR5dmVyc2UpCmBgYAoKYGBge3J9Cmdlcm0gPC0gcmVhZF9jc3YoIi4uL0RpbWVuc2lvbnMvaHlkcm90aGVybWFsLWFsbC1zcGVjaWVzL2RhdGEvbGlnaHRfcm91bmQxX3RhbGwuY3N2IikgJT4lIGZpbHRlcih3cHM9PTApICU+JQogIHNlbGVjdChwb3BzLCB0ZW1wcywgdG90YWxfc2VlZHMsIGdlcm0sIGRheSwgY3VtdWxhdGl2ZV9nZXJtKQpnZXJtCmBgYAoKd2hhdCBpZiBuZWVkIG9uZSBldmVudCBwZXIgcm93OgoKYGBge3J9Cm9uZV9wZXJfcm93IDwtIGZ1bmN0aW9uKGRmKSB7CiAgdG90YWxfc2VlZCA8LSBtYXgoZGYkdG90YWxfc2VlZHMsIHN1bShkZiRnZXJtKSkKICBuZXdkYXRhIDwtIHRpYmJsZShpZD0xOnRvdGFsX3NlZWQsIGdlcm09MCwgZGF5PW1heChkZiRkYXkpKQogIGRmIDwtIGRmICU+JSBmaWx0ZXIoZ2VybT4wKQogIGNvdW50IDwtIDEKICBpZiAobnJvdyhkZikgPiAwKSB7CiAgICBmb3IgKGkgaW4gMTpucm93KGRmKSkgeyAjIHdlIGxvb2sgYXQgZWFjaCByb3cgb2YgdGhlIGRmIHdoZXJlIGdlcm1pbmF0aW9uIG9jY3VyZWQKICAgICAgZm9yIChqIGluIDE6ZGYkZ2VybVtpXSkgeyAjIG5vdyB1cGRhdGUgdGhlIG5ld2RhdGEgdG8gcmVmbGVjdCB0aGUgZ2VybWlhbnRpb24gb2YgZWFjaCBzZWVkCiAgICAgICAgbmV3ZGF0YSRnZXJtW2NvdW50XSA8LSAxCiAgICAgICAgbmV3ZGF0YSRkYXlbY291bnRdPWRmJGRheVtpXQogICAgICAgIGNvdW50IDwtIGNvdW50KzEgIyBjb3VudCBrZWVwcyB0cmFjayBvZiB3aGljaCBpbmRpdmlkdWFsIHdlIGFyZSBhdCBpbiB0aGUgbmV3IGRhdGEKICAgICAgfSAjIGZvciBqCiAgICB9ICMgZm9yIGkKICB9ICMgaWYgCiAgcmV0dXJuKG5ld2RhdGEpCn0KCmdlcm1vbmUgPC0gZ2VybSAlPiUgZ3JvdXBfYnkocG9wcywgdGVtcHMpICU+JQogIHNlbGVjdCgtY3VtdWxhdGl2ZV9nZXJtKSAlPiUgIyBub3QgbmVlZGVkIGluIHRoaXMgZW5jb2RpbmcgKEkgdGhpbmsuLi5pbiBhbnkgY2FzZSB3b3VsZCBuZWVkIHRvIGJlIHJlY2FsY3VsYXRlZCkKICBuZXN0KCkgJT4lCiAgbXV0YXRlKG5ld2RhdGE9bWFwKGRhdGEsIG9uZV9wZXJfcm93KSkgJT4lCiAgc2VsZWN0KC1kYXRhKSAlPiUKICB1bm5lc3QobmV3ZGF0YSkKCmdlcm1vbmUKYGBgCgojIzEKCklzIGVmZmVjdCBvZiB0ZW1wIH5saW5lYXIgb24gY3VtIGdlcm0/CgpgYGB7cn0KZ2VybSAlPiUgZmlsdGVyKHBvcHM9PSJTVERJIiwgZGF5PT0yOCkgJT4lCiAgZ2dwbG90KGFlcyh4PXRlbXBzLHk9Y3VtdWxhdGl2ZV9nZXJtKSkgKwogIGdlb21fY29sKCkKYGBgCm5vLCBzbyB3ZSBjYW4ndCB0cmVhdCB0ZW1wIGFzIGEgY29udGludW91cyBsaW5lYXIgcHJlZGljdG9yCgpgYGB7cn0KZ2VybS5zdGRpIDwtIGdlcm1vbmUgJT4lIGZpbHRlcihwb3BzPT0iU1RESSIpICU+
JSBzZWxlY3QoLXBvcHMpCmdlcm0uc3RkaQpgYGAKVHJ5IGEgY2Vuc29yZWQgbGFtYmRhIG1vZGVsCmBgYHtyfQpkIDwtIGxpc3QoZ2VybT1nZXJtLnN0ZGkkZ2VybSwgdGVtcHM9YXMubnVtZXJpYyhhcy5mYWN0b3IoZ2VybS5zdGRpJHRlbXBzKSksIGRheT1nZXJtLnN0ZGkkZGF5KQptMS4xIDwtIHVsYW0oCiAgYWxpc3QoCiAgICBkYXkgfCBnZXJtPT0xIH4gZXhwb25lbnRpYWwoIGxhbWJkYSksCiAgICBkYXkgfCBnZXJtPT0wIH4gY3VzdG9tKGV4cG9uZW50aWFsX2xjY2RmKCAhWSB8IGxhbWJkYSkpLAogICAgbGFtYmRhIDwtIDEuMCAvIG11LAogICAgbG9nKG11KSA8LSBhW3RlbXBzXSwKICAgIGFbdGVtcHNdIH4gbm9ybWFsKDAsMSkpLAogIGRhdGE9ZCwKICBjaGFpbnM9NCwKICBjb3JlcyA9IDQKKQpgYGAKCmBgYHtyfQpwcmVjaXMobTEuMSwgZGVwdGggPSAyKQpgYGAKVGhlIGFib3ZlIHJlcHJlc2VudCBsb2cobWVhbiB0aW1lIHRvIGdlcm1pbmF0aW9uKQoKYGBge3J9CmV4cCg1LjQ1KQpleHAoMi40OCkKYGBgCgoKcG9zdGVyaW9yCmBgYHtyfQpwcmVkZGF0YSA8LSBleHBhbmRfZ3JpZCh0ZW1wcz0xOjgsIGRheT0xOjI4KQpwcmVkIDwtIGxpbmsobTEuMSwgZGF0YSA9IHByZWRkYXRhKQpzdHIocHJlZCkKYGBgCgptdSBpcyBhdmVyYWdlIGRheXMgdG8gZ2VybSwgbGFtYmRhIGlzIHJhdGUgcGFyYW1ldGVyLiAgbmVpdGhlciBjaGFuZ2Ugb3ZlciB0aW1lLCBvZiBjb3Vyc2UsIHNvIGhhdmluZyBkYXlzIGRvZXNuJ3QgcmVhbGx5IG1ha2Ugc2Vuc2UuCgpgYGB7cn0KcHJlZGRhdGEkbXUgPC0gYXBwbHkocHJlZCRtdSwyLG1lYW4pCnByZWRkYXRhJGxvdyA8LSBhcHBseShwcmVkJG11LDIsSFBESSlbMSxdCnByZWRkYXRhJGhpZ2ggPC0gYXBwbHkocHJlZCRtdSwgMiwgSFBESSlbMixdCmBgYAoKU2luZ2xlIHRlbXAuICBEb24ndCBuZWVkIGRheQpwb3N0ZXJpb3IKYGBge3J9CnByZWRkYXRhIDwtIGV4cGFuZF9ncmlkKHRlbXBzPTMpCnByZWQgPC0gbGluayhtMS4xLCBkYXRhID0gcHJlZGRhdGEpCnN0cihwcmVkKQpgYGAKCmhvdyB0byBjb252ZXJ0IHRvIHByb2JzPyB1c2UgcGV4cC4KCmBgYHtyfQpwcmVkcHJvYnMgPC0gcGV4cCgxOjI4LHJhdGU9cHJlZCRsYW1iZGFbMV0pCnBsb3QoeD0xOjI4LHk9cHJlZHByb2JzLCB0eXBlPSJsIikKYGBgCgpldmVuIHRob3VnaHQgaXQgaXNuJ3QgdXNpbmcgZGF5LCBpbmNsdWRpbmcgZGF5IGluIHRoZSBwcmVkaWN0aW9uIGRhdGEgZnJhbWUgd2lsbCBoZWxwIG1lIGtlZXAgZGF0YSBpbiB0aGUgY29ycmVjdCBmb3JtYXQuICBNYXliZS4KCmBgYHtyfQpwcmVkZGF0YSA8LSBleHBhbmRfZ3JpZCh0ZW1wcz0xOjgsIGRheT0xOjI4KQpwcmVkIDwtIGxpbmsobTEuMSwgZGF0YSA9IHByZWRkYXRhKQpzdHIocHJlZCkKYGBgCgpgYGB7cn0KcHJlZHJlc3VsdHMgPC0gcHJlZGRhdGEgJT4lCiAgbXV0YXRlKGxhbWJkYT1hcy5saXN0KGFzLmRhdGEuZnJhbWUocHJlZCRsYW1iZGEpKSkKcHJlZHJlc3VsdHMK
YGBgCmBgYHtyfQpwcmVkcmVzdWx0cyA8LSBwcmVkcmVzdWx0cyAlPiUKICBtdXRhdGUocHJvYnM9bWFwMihkYXksIGxhbWJkYSwgfiBwZXhwKC54LCAueSkpLAogICAgICAgICBtdT1tYXBfZGJsKHByb2JzLCBtZWFuKSwKICAgICAgICAgbG93ZXI9bWFwX2RibChwcm9icywgfiBIUERJKC4pWzFdICU+JSB1bmxpc3QoKSksCiAgICAgICAgIHVwcGVyPW1hcF9kYmwocHJvYnMsIH4gSFBESSguKVsyXSkgJT4lIHVubGlzdCgpKQpwcmVkcmVzdWx0cwpgYGAKCgpgYGB7cn0KcHJlZHJlc3VsdHMgJT4lIHNlbGVjdCgtbGFtYmRhLCAtcHJvYnMpICU+JQogIG11dGF0ZSh0ZW1wcz1mYWN0b3IodGVtcHMsIGxhYmVscz1hcy5jaGFyYWN0ZXIoc29ydCh1bmlxdWUoZ2VybS5zdGRpJHRlbXBzKSkpKSkgJT4lCiAgZ2dwbG90KGFlcyh4PWRheSx5PW11LGNvbG9yPXRlbXBzLGdyb3VwPXRlbXBzKSkgKwogIGdlb21fbGluZSgpIApgYGAKCkFkZCByZWFsZGF0YToKCmBgYHtyfQpzdGRpLnBsb3QgPC0gZ2VybSAlPiUgZmlsdGVyKHBvcHM9PSJTVERJIikgJT4lIAogIHNlbGVjdChkYXksIHRlbXBzLCBjdW11bGF0aXZlX2dlcm0sIHRvdGFsX3NlZWRzKSAlPiUKICBtdXRhdGUodGVtcHM9YXMuZmFjdG9yKHRlbXBzKSwKICAgICAgICAgcHJvcC5nZXJtPWN1bXVsYXRpdmVfZ2VybS90b3RhbF9zZWVkcykKCnByZWRyZXN1bHRzICU+JSBzZWxlY3QoLWxhbWJkYSwgLXByb2JzKSAlPiUKICBtdXRhdGUodGVtcHM9ZmFjdG9yKHRlbXBzLCBsYWJlbHM9YXMuY2hhcmFjdGVyKHNvcnQodW5pcXVlKGdlcm0uc3RkaSR0ZW1wcykpKSkpICU+JQogIGdncGxvdChhZXMoeD1kYXkseT1tdSxjb2xvcj10ZW1wcyxncm91cD10ZW1wcykpICsKICBnZW9tX2xpbmUoKSArCiAgZ2VvbV9wb2ludChhZXMoeT1wcm9wLmdlcm0pLCBkYXRhPXN0ZGkucGxvdCkKYGBgCgpQb29yIQoKIyMjIGJybXMKCm5lZWQgdG8gc2V0IHVwIGluZGljYXRvciBmb3IgY2Vuc29yaW5nLgpgYGB7cn0KZ2VybS5zdGRpIDwtIGdlcm0uc3RkaSAlPiUKICBtdXRhdGUoY2Vucz1pZmVsc2UoZ2VybT09MCwgInJpZ2h0IiwgIm5vbmUiKSwKICAgICAgICAgdGVtcHNjPWFzLmNoYXJhY3Rlcih0ZW1wcykgJT4lIHN0cl9wYWQod2lkdGg9MiwgcGFkPSIwIikpCmdlcm0uc3RkaQpgYGAKCmBgYHtyfQpnZXRfcHJpb3IoZGF5IHwgY2VucyhjZW5zKSB+IDAgKyB0ZW1wc2MsIGZhbWlseSA9IGV4cG9uZW50aWFsLCBkYXRhPWdlcm0uc3RkaSkKYGBgCgoKYGBge3J9Cm0xLjIgPC0gYnJtKGRheSB8IGNlbnMoY2VucykgfiAwICsgdGVtcHNjLAogICAgICAgICAgICBmYW1pbHkgPSBleHBvbmVudGlhbCgpLAogICAgICAgICAgICBzZXRfcHJpb3IoIm5vcm1hbCgwLDEpIiwgY2xhc3M9ImIiKSwKICAgICAgICAgICAgZGF0YSA9IGdlcm0uc3RkaSkKYGBgCmBgYHtyfQpzdW1tYXJ5KG0xLjIpCmBgYAoKCmBgYHtyfQpwcmVkaWN0KG0xLjIpCmBgYAp3aGF0IGFyZSB0aGVzZT8gIHRoZSBhdmVyYWdlIG51bWJlciBvZiBk
YXlzPwoKYGBge3J9CnByZWRpY3QobTEuMiwgbmV3ZGF0YSA9IGV4cGFuZF9ncmlkKHRlbXBzYz11bmlxdWUoZ2VybS5zdGRpJHRlbXBzYyksIGNlbnM9Yygibm9uZSIsICJyaWdodCIpKSkKYGBgCgoKYGBge3J9CnByZWNpcyhtMS4xLCBkZXB0aCA9IDIpCmBgYAphZ2FpbiwgdGhlc2UgYXJlIGxvZyhtZWFuIHRpbWUgdG8gZ2VybWluYXRpb24pCgpsYW1iZGEgc2hvdWxkIGJlIDEvbXUsIHNvCgpgYGB7cn0KcGxvdCgxOjI4LHBleHAoMToyOCwgMS9leHAoMi40OCkpLCB0eXBlPSJsIiwgY29sPSJyZWQiKQpsaW5lcygxOjI4LHBleHAoMToyOCwgMS9leHAoNS42MCkpLCB0eXBlPSJsIiwgY29sPSJibHVlIikKYGBgCgoKYGBge3J9CmdldF9wcmlvciggZGF5IHwgY2VucyhjZW5zKSB+IDAgKyB0ZW1wc2MsCiAgICAgICAgICAgIGZhbWlseSA9IEdhbW1hKCksCiAgICAgICAgICAgIGRhdGEgPSBnZXJtLnN0ZGkpCmBgYAoKCmBgYHtyfQptMS4zIDwtIGJybShkYXkgfCBjZW5zKGNlbnMpIH4gMCArIHRlbXBzYywKICAgICAgICAgICAgZmFtaWx5ID0gR2FtbWEoKSwKICAgICAgICAgICAgc2V0X3ByaW9yKCJub3JtYWwoMCwxKSIsIGNsYXNzPSJiIiwgbGI9MCksCiAgICAgICAgICAgIGRhdGEgPSBnZXJtLnN0ZGkpCmBgYAoKYGBge3J9CnN1bW1hcnkobTEuMykKYGBgCgpgYGB7cn0KbmV3ZGF0YSA8LSBleHBhbmRfZ3JpZCh0ZW1wc2M9dW5pcXVlKGdlcm0uc3RkaSR0ZW1wc2MpLCBjZW5zPWMoIm5vbmUiLCAicmlnaHQiKSkKY2JpbmQobmV3ZGF0YSwgcHJlZGljdChtMS4zLCBuZXdkYXRhID0gbmV3ZGF0YSkpCmBgYAoK